# --- Notebook bootstrap ---------------------------------------------------
# IPython magics: render figures inline and auto-reload edited project modules.
%matplotlib inline
%load_ext autoreload
%autoreload 2
import logging
# basicConfig installs a default handler with level ERROR on the root logger...
logging.basicConfig(level=logging.ERROR)
# ...then the root level is lowered to INFO so library progress messages show up.
logging.root.setLevel('INFO')
# Os
import pathlib
import sys
import gc
import os
import io
import json
import yaml
from marshmallow import pprint
# Data display
import qgrid
import IPython
# Scientific libs
import numpy as np
import pandas as pd
import scipy
import scipy.stats
# Geoprocissing libs
import geopandas as gpd
import rasterio as rio
import rasterio.crs
import fiona as fio
import fiona.crs
import shapely.geometry as sg
# Plotting
import matplotlib.pyplot as plt
from cycler import cycler
# EWS Library
# Make the in-house EWS library (pointed to by $EWS_LIB_PATH) importable,
# record which git commit is checked out, and report its on-disk size.
assert os.path.isdir(os.environ["EWS_LIB_PATH"])
if not os.environ["EWS_LIB_PATH"] in sys.path:
sys.path.append(os.environ["EWS_LIB_PATH"])
from core.archive.git import get_last_commit
# Print the library commit for reproducibility of this notebook run.
print(get_last_commit())
import core.utils.scitools as EWSSciTools
from core.docker import from_win_path_to_container as norm_pathname
from core.io.yaml import load_file, yaml, Dumper
from collections import namedtuple, OrderedDict, defaultdict
from core import init_notebook
from core.utils.ipython import display_end_of_script, nbconvert_html
# Sum the size of the "core" and "wind" sub-packages (informational only).
paths = [os.path.join(os.environ['EWS_LIB_PATH'], x) for x in ["core", "wind"]]
import core.utils.io_utils
libSize = core.utils.io_utils.naturalsize(np.array([core.utils.io_utils.get_size(start_path=x) for x in paths]).sum())
print("EWS Library size: ", libSize)
# Drop the temporaries so they don't linger in the notebook namespace.
del libSize, paths
def side_by_side(*args):
    """Display several DataFrames/Stylers horizontally in the notebook.

    Each positional argument may be a ``pd.DataFrame``, a pandas ``Styler``,
    or a ``(frame_or_styler, caption)`` tuple; anything else is ignored.
    The HTML fragments are concatenated, minified and displayed inline.
    See https://pandas.pydata.org/pandas-docs/stable/user_guide/style.html
    """
    import pandas.io.formats.style
    from core.utils.html import minify

    def _inline(obj):
        # Force inline display so the tables sit next to each other.
        if hasattr(obj, "style"):
            return obj.style.set_table_attributes("style='display:inline'")
        if hasattr(obj, "set_table_attributes"):
            return obj.set_table_attributes("style='display:inline'")
        return obj

    fragments = []
    for item in args:
        if isinstance(item, (pd.DataFrame, pd.io.formats.style.Styler)):
            fragments.append(_inline(item)._repr_html_())
        elif isinstance(item, tuple):
            # (object, caption) pair: attach the caption to the styled table.
            fragments.append(_inline(item[0]).set_caption(item[1])._repr_html_())
    html_str = minify("".join(fragments), engine="html_minify")
    IPython.display.display_html(html_str, raw=True)
# Helper: build an EWS EvalPlot with the standard theme and hand back both the
# rendered plot object and its (single) matplotlib axis.
def create_plot(**kws):
from core.plot.eval_plot.eval_plot import EvalPlot
from core.export.plot_base import PlotExporterBase
# Make the plot machinery chatty enough to see render progress.
logging.getLogger("core.plot.eval_plot.eval_plot").setLevel(logging.INFO)
EWSTheme = PlotExporterBase.default_themes()["EWSTheme"]
plot = EvalPlot(
config={
# Pre-plot hook template: create one full-figure subplot before plotting.
'pre_plot_hooks': ['{{ figure }}.add_subplot(111)'],
#'despine': dict(enable=True, top=False, right=False, left=False, bottom=False),
**kws
},
theme=EWSTheme).render()
# The hook above guarantees exactly one axis exists.
ax = plot.figure.axes[0]
return plot, ax
# Initialize notebook-wide EWS integration (presumably registers display
# accessors such as .to_leaflet / .idisplay used below -- project helper).
init_notebook()
# Injects a button that hides/shows all code cells in the rendered notebook.
# NOTE(review): the HTML object is the cell's last expression in the original
# notebook; as a plain script this return value is discarded.
IPython.display.HTML('''<script>
code_show = true;
function code_toggle() {
if (code_show){
$('div.input').hide();
} else {
$('div.input').show();
}
code_show = !code_show
}
</script>
<button type="button" onclick="code_toggle()"><b>TOGGLE CODE CELLS</b></button>''')
from core.docker import from_container_path_to_win
# Show the host (Windows) path of the container working directory.
print(from_container_path_to_win(os.getcwd()))
from core.utils.io_utils import make_path
# WindPro project folder layout. The German sub-folder names follow the
# WindPro "basis" convention: karten=maps, höhenmodell=elevation model,
# rauigkeit=roughness, koordinaten=coordinates.
WINDPRO_PATH = make_path(norm_pathname(r'T:\EWS_Projects\Gral_Gramm\askervein\windpro'))
BASIS_PATH = WINDPRO_PATH.joinpath("basis")
MAPS_PATH = BASIS_PATH.joinpath("karten")
DTM_PATH = BASIS_PATH.joinpath("höhenmodell")
ROUGH_PATH = BASIS_PATH.joinpath("rauigkeit")
COORDS_PATH = BASIS_PATH.joinpath("koordinaten")
# mkdir(parents=True, exist_ok=True) is already a no-op for existing
# directories, so the previous per-path `if not X.is_dir():` guards were
# redundant (and race-prone between check and creation); just create them all.
for _path in (WINDPRO_PATH, BASIS_PATH, MAPS_PATH, DTM_PATH, ROUGH_PATH, COORDS_PATH):
    _path.mkdir(parents=True, exist_ok=True)
del _path
# Notebook display: list what is already present under basis/.
list(BASIS_PATH.glob("*"))
import pathlib
# Askervein measurement-campaign input data (mounted project drive).
DATA_PATH = pathlib.Path(norm_pathname(r"T:\EWS_Projects\Gral_Gramm\askervein\input_data"))
DATA_PATH.is_dir()
import shapely.geometry
csv_fname = DATA_PATH.joinpath("Askervein_devc_loc.csv")
gdf = pd.read_csv(csv_fname)
# Build point geometries from the CSV; assumes the LAST TWO columns are the
# planar coordinates (easting/northing) -- TODO confirm against the CSV header.
gdf =gdf[["Name", "AGL"]].assign(geometry=gdf.iloc[:, -2:].apply(shapely.geometry.Point, axis=1))
# EPSG:27700 = OSGB 1936 / British National Grid (Askervein is in Scotland).
gdf = gpd.GeoDataFrame(gdf, crs=fio.crs.from_epsg(27700))
# NOTE(review): writes into the current working directory, not DATA_PATH.
gdf.to_file("Askervein_locs.shp")
turbines_df = gdf.copy()
#gdf = gdf.to_crs(epsg=4326)
gdf
# .to_leaflet is presumably registered by init_notebook() -- project accessor.
gdf.to_leaflet(display=True);
# Rough site centre: centroid of the union of 25 km buffers around all masts.
center_pt = gdf.geometry.buffer(25000).unary_union.centroid
list(map(int, (center_pt.x, center_pt.y)))
EPSG_CODE = 27700
# Whole analysis configuration rendered as a YAML document (f-string), then
# parsed back.  Keeping it as YAML makes it printable/copyable from the
# notebook output.
# NOTE(review): in the `name:` entry below, `.split('"')[1]` already extracts
# the CRS name (the text between the first pair of quotes in the WKT); the
# trailing `.split('"')[0]` is a no-op.
ANALYSIS_CFG_str = rf'''
projection:
epsg: {EPSG_CODE}
proj4s: {rio.crs.CRS.from_epsg(EPSG_CODE).to_proj4()}
crs: {rio.crs.CRS.from_epsg(EPSG_CODE).to_dict()!r}
name: {rio.crs.CRS.from_epsg(EPSG_CODE).to_wkt().split('"')[1].split('"')[0]}
domains:
items:
- domain_name: small_domain
buffer_distance: 5000
dtm_resolution: 5
- domain_name: large_domain
buffer_distance: 10000
dtm_resolution: 50
dtm:
path: {str(DTM_PATH)}
items:
- domain_name: large_domain
parts:
- name: SRTM
filename: S:\SRTM_Data\all_srtms.vrt
- domain_name: small_domain
parts:
- name: SRTM
filename: S:\SRTM_Data\all_srtms.vrt
roughness:
path: {str(ROUGH_PATH)}
items:
- domain_name: large_domain
- domain_name: small_domain
maps:
path: {str(MAPS_PATH)}
items:
- domain_name: large_domain
mpp: 15
server_name: 'Google Satellite'
filename: 'google_satellite_large'
- domain_name: large_domain
mpp: 15
server_name: 'ARCGIS World_Shaded_Relief'
filename: 'shaded_relief_large'
- domain_name: small_domain
mpp: 8
server_name: 'Google Satellite'
filename: 'google_satellite_small'
'''
# Parse the YAML back into plain dicts/lists.
ANALYSIS_CFG = yaml.safe_load(io.StringIO(ANALYSIS_CFG_str))
# Wrap the top level (and the projection sub-dict) in namedtuples so the rest
# of the notebook can use attribute access (ANALYSIS_CFG.projection.epsg, ...).
ANALYSIS_CFG["projection"] = namedtuple("Configuration", ANALYSIS_CFG["projection"].keys())(*ANALYSIS_CFG["projection"].values())
ANALYSIS_CFG = namedtuple("Configuration", ANALYSIS_CFG.keys())(*ANALYSIS_CFG.values())
print(ANALYSIS_CFG_str)
# Build one rectangular download/analysis domain per configured domain:
# a buffered envelope around the mast locations, snapped to the DTM grid.
gdf = turbines_df.to_crs(ANALYSIS_CFG.projection.crs).set_index("Name").copy()
geom = gdf.geometry
domains_dict = dict()
for domain_cfg in ANALYSIS_CFG.domains.get("items", []):
domain_name = domain_cfg["domain_name"]
print(domain_name)
r = float(domain_cfg["dtm_resolution"])
buffer_distance = float(domain_cfg["buffer_distance"])
# Optionally leave some masts out of the domain extent computation.
excluded_names = domain_cfg.get("excluded_names", [])
geom = gdf.drop(excluded_names, errors="ignore", axis=0).geometry
# Buffer out by (distance + 50 km) then back in by 50 km: a morphological
# closing that merges the individual mast buffers into one smooth region
# while keeping ~buffer_distance of margin around the masts.
b = np.array(geom.buffer(buffer_distance + 50000).unary_union.buffer(-50000).bounds)
# Snap the bounding box outward onto the DTM resolution grid r.
download_shape = sg.box(*np.column_stack((r * np.floor(b[:2] / r), r * np.ceil(b[2:] / r))).T.flatten())
# Store the domain rectangle as an extra row in gdf (Type == "Domain").
gdf.loc[domain_name, "Type"] = "Domain"
gdf.loc[domain_name, "geometry"] = download_shape
domains_dict[domain_name] = {
"buffer_distance": buffer_distance,
"dtm_resolution": r,
"shape": download_shape,
"bounds": download_shape.bounds,
}
del geom, b, r, domain_name, buffer_distance, download_shape
# Interactive map of masts + domain rectangles (project leaflet accessor).
gdf.reset_index().to_leaflet(buffer=100)[0]
from core.gis.raster.raster_base import RasterBase
import tempfile
import core.gis.raster.ops as RasterOps
from core.utils.system import execute_in_terminal
import shutil
# For each domain: resample the SRTM mosaic onto the domain grid, keep the
# raster in memory (dtm_dict), and export it as GeoTIFF + ESRI ASCII grid
# (the .asc.txt is the WindPro/GRAMM-friendly format).
dtm_dict = dict()
for r_config in ANALYSIS_CFG.dtm.get("items", []):
domain_name = r_config["domain_name"]
domain_cfg = domains_dict[domain_name]
resolution = domain_cfg["dtm_resolution"]
bounds = domain_cfg["bounds"]
raster_config = {
"name": domain_name,
"resampling": {
"active": True,
# Bilinear is appropriate for continuous elevation data.
"method": "bilinear",
"resolution": resolution,
"bounds": list(bounds),
"projection": {
"proj4s": ANALYSIS_CFG.projection.proj4s,
"name": ANALYSIS_CFG.projection.name
}
},
"parts": r_config["parts"]
}
pprint(raster_config)
raster = RasterBase(config=raster_config)
raster.read()
dtm_dict[domain_name] = raster
plt.imshow(raster.data, extent=raster.extent)
path = make_path(ANALYSIS_CFG.dtm.get("path", os.getcwd()))
dtm_fname = path.joinpath(f"{domain_name}.tif")
grd_fname = path.joinpath(f"{domain_name}.asc.txt")
# Write to a temp file first, then move into place (quasi-atomic write).
with tempfile.NamedTemporaryFile(delete=False) as t_file:
RasterOps.data_to_geotiff(
t_file.name, raster.data.astype("float32"), raster.transform,
profile={"dtype": "float32", "crs": raster.crs, "nodata":-999}
)
shutil.move(t_file.name, dtm_fname)
assert dtm_fname.exists()
# Convert the GeoTIFF to Arc/Info ASCII grid via the gdal CLI.
execute_in_terminal(f"gdal_translate -of AAIGrid '{dtm_fname}' '{grd_fname}'", verbose=True)
#dtm_fname.unlink()
# Sanity-check plots: re-open each exported DTM GeoTIFF and show it with the
# mast locations overlaid.
fig, axL = plt.subplots(1, len(ANALYSIS_CFG.dtm.get("items", [])), figsize=(16, 12) )
# flatten() also normalizes the single-subplot case (scalar -> 1-element array).
axL = np.array(axL).flatten()
rdict = dict()
for i, r_config in enumerate(ANALYSIS_CFG.dtm.get("items", [])):
ax = axL[i]
domain_name = r_config["domain_name"]
path = make_path(ANALYSIS_CFG.dtm.get("path", os.getcwd()))
dtm_fname = path.joinpath(f"{domain_name}.tif")
with rio.open(str(dtm_fname)) as src:
data = src.read(1)
bounds = src.bounds
# rasterio bounds are (left, bottom, right, top) -> matplotlib extent
# (left, right, bottom, top).
extent = src.bounds[::2] + src.bounds[1::2]
cmap = "gist_earth"
norm = None
rdict[domain_name] = data
im = ax.imshow(data, cmap=cmap, norm=norm, extent=extent)
plt.colorbar(im, ax=ax, orientation="horizontal")
if True:
# Overlay the mast points (domain rectangles are excluded).
ax.plot(
[pt.x for pt in gdf.query("Type != 'Domain'").geometry],
[pt.y for pt in gdf.query("Type != 'Domain'").geometry],
mfc="w", color="k", marker="o", ls="none"
)
ax.set(xlim=extent[:2], ylim=extent[2:], aspect="equal", title=domain_name)
fig.tight_layout();
import rasterio.features
import requests, urllib3
import shutil
from core.utils.system import execute_in_terminal
# CORINE CLC2018 land-cover classes with the EMD roughness lengths z0 [m].
# The original version parsed a tab-separated text blob with
# `CLASS_LUT["Code_18"].str.replace(".", "")`, which relies on the removed
# pandas quirk of treating a single-character pattern as a literal even under
# the (old) default regex=True; on modern pandas that call strips EVERY
# character.  Building the frame from a plain list of tuples is robust.
# Each entry: (ID, land-cover name, CLC level-3 code or None for NO DATA, z0).
_CLC_CLASSES = [
    (1, "Continuous urban fabric", "1.1.1", 0.5),
    (2, "Discontinuous urban fabric", "1.1.2", 0.4),
    (3, "Industrial or commercial units", "1.2.1", 0.7),
    (4, "Road and rail networks and associated land", "1.2.2", 0.1),
    (5, "Port areas", "1.2.3", 0.5),
    (6, "Airports", "1.2.6", 0.03),
    (7, "Mineral extraction sites", "1.3.1", 0.1),
    (8, "Dump sites", "1.3.2", 0.1),
    (9, "Construction sites", "1.3.3", 0.3),
    (10, "Green urban areas", "1.4.1", 0.4),
    (11, "Sport and leisure facilities", "1.4.2", 0.5),
    (12, "Non-irrigated arable land", "2.1.1", 0.056),
    (13, "Permanently irrigated land", "2.1.2", 0.056),
    (14, "Rice fields", "2.1.3", 0.0184),
    (15, "Vineyards", "2.2.1", 0.3),
    (16, "Fruit trees and berry plantations", "2.2.2", 0.4),
    (17, "Olive groves", "2.2.3", 0.4),
    (18, "Pastures", "2.3.1", 0.036),
    (19, "Annual crops associated with permanent crops", "2.4.1", 0.056),
    (20, "Complex cultivation patterns", "2.4.2", 0.056),
    (21, "Land principally occupied by agriculture, with significant areas of natural vegetation", "2.4.3", 0.056),
    (22, "Agro-forestry areas", "2.4.4", 0.5),
    (23, "Broad-leaved forest", "3.1.1", 0.5),
    (24, "Coniferous forest", "3.1.2", 0.5),
    (25, "Mixed forest", "3.1.3", 0.5),
    (26, "Natural grassland", "3.2.1", 0.056),
    (27, "Moors and heathland", "3.2.2", 0.06),
    (28, "Sclerophyllous vegetation", "3.2.3", 0.056),
    (29, "Sparse coniferous forest, cc. 10-30%", "3.2.4", 0.4),
    (30, "Beaches, dunes, and sand plains", "3.3.1", 0.01),
    (31, "Bare rock", "3.3.2", 0.05),
    (32, "Sparsely vegetated areas", "3.3.3", 0.2),
    (33, "Burnt areas", "3.3.4", 0.2),
    (34, "Glaciers and perpetual snow", "3.3.5", 0.2),
    (35, "Inland marshes", "4.1.1", 0.05),
    (36, "Peat bogs", "4.1.2", 0.0184),
    (37, "Salt marshes", "4.2.1", 0.0348),
    (38, "Salines", "4.2.2", 0.03),
    (39, "Intertidal flats", "4.2.3", 0.0005),
    (40, "River", "5.1.1", 0.0),
    (41, "Lake", "5.1.2", 0.0),
    (42, "Coastal lagoons", "5.2.1", 0.0),
    (43, "Estuaries", "5.2.2", 0.0),
    (44, "Sea and ocean", "5.2.3", 0.0),
    (45, "NO DATA", None, -1.0),
]
_rows = []
for _id, _name, _code, _z0 in _CLC_CLASSES:
    # 'Land Cover Type' keeps the "<name> <code>" form of the original table;
    # Code_18 is the CLC code with dots stripped ("2.4.3" -> 243), -1 for NO DATA.
    _label = _name if _code is None else f"{_name} {_code}"
    _code18 = -1 if _code is None else int(_code.replace(".", ""))
    _rows.append((_id, _label, _z0, _code18))
CLASS_LUT = pd.DataFrame(_rows, columns=["ID", "Land Cover Type", "z0", "Code_18"])
CLASS_LUT = CLASS_LUT.astype({"ID": "int", "z0": "float", "Code_18": "int"})
del _rows, _CLC_CLASSES
# .idisplay is presumably a project-registered pandas display accessor
# (attribute access only, no call) -- verify against init_notebook().
CLASS_LUT.idisplay;
# Keep the full table indexed by CLC code for label building...
CLASS_LUT_DF = CLASS_LUT.set_index("Code_18")
# ...and collapse CLASS_LUT itself into a {Code_18: z0} lookup dict used below.
CLASS_LUT = CLASS_LUT.set_index("Code_18")["z0"].to_dict()
# Human-readable legend per CLC code, e.g. "Pastures 2.3.1 - z0=0.036".
label_dict = CLASS_LUT_DF.apply(
lambda row: f"{row['Land Cover Type']} - z0={row.z0}", axis=1
).to_dict()
label_dict
# For each domain: query the EEA ArcGIS MapServer for CLC2018 land-cover
# polygons intersecting the domain, map them to roughness lengths, and export
# shapefile + raster (loop body continues through the export cells below).
for r_config in ANALYSIS_CFG.roughness.get("items", []):
domain_name = r_config["domain_name"]
domain_cfg = domains_dict[domain_name]
#resolution = domain_cfg["dtm_resolution"]
#bounds = domain_cfg["bounds"]
# Reuse the already-built DTM raster as the grid template for rasterization.
raster = dtm_dict[domain_name]
resolution = raster.resolution[0]
bounds = raster.bounds
raster.read()
path = make_path(ANALYSIS_CFG.roughness.get("path", os.getcwd()))
# Common ArcGIS REST query parameters; the envelope is the domain bounds in
# the analysis CRS (inSR/outSR = analysis EPSG).
base_url = "https://image.discomap.eea.europa.eu/arcgis/rest/services/Corine/CLC2018_WM/MapServer/0/query?"
base_kws = {
"geometry": ",".join(map(str, bounds)),
"geometryType": "esriGeometryEnvelope",
"inSR": ANALYSIS_CFG.projection.epsg,
"spatialRel": "esriSpatialRelIntersects",
"returnTrueCurves": True,
"outSR": ANALYSIS_CFG.projection.epsg,
"returnZ": False,
"returnM": False,
"f": "json",
"text": "",
"time": "",
"relationParam": "",
"outFields": "",
"maxAllowableOffset": "",
"geometryPrecision": "",
"orderByFields": "",
"groupByFieldsForStatistics": "",
"outStatistics": "",
"gdbVersion": "",
"returnDistinctValues": False,
"resultOffset": "",
"resultRecordCount": "",
"queryByDistance": "",
"returnExtentsOnly": False,
"datumTransformation": "",
"parameterValues": "",
"rangeValues": "",
}
# First query: only the total feature count for the envelope.
url = base_url + urllib3.request.urlencode({
"returnIdsOnly": False,
"returnCountOnly": True,
"returnGeometry": False,
**base_kws
})
#print(url)
n_expected = requests.api.get(url).json()["count"]
print(n_expected)
# Second query: the object IDs only (geometry fetched later in chunks).
url = base_url + urllib3.request.urlencode({
"returnIdsOnly": True,
"returnCountOnly": False,
"returnGeometry": False,
**base_kws
})
ids = requests.api.get(url).json()["objectIds"]
print(len(ids))
assert len(ids) == n_expected, "Number of ids doesn't match!!!"
# The server limits records per request; split the IDs into chunks.
MAX_RECORDS_COUNT = 1000
n, r = divmod(len(ids), MAX_RECORDS_COUNT)
n = n+1 if r > 0 else n
dl_list = []
for cpt in range(n):
dl_list += [ids[cpt*MAX_RECORDS_COUNT:(cpt+1)*MAX_RECORDS_COUNT]]
print([len(x) for x in dl_list])
# Download each ID chunk as GeoJSON and concatenate into one GeoDataFrame.
c_gdf = list()
for idList in dl_list:
# NOTE(review): `s` is only used by the commented-out "OBJECTID in (...)"
# variant below; the active query uses a min/max range instead, which
# assumes the IDs inside a chunk are contiguous (the count assert further
# down would catch a mismatch).
s = ",".join(map(str, idList))
url = base_url + urllib3.request.urlencode({
#"where": f"OBJECTID in ({s})",
"where": f"(OBJECTID >= {idList[0]}) AND (OBJECTID <= {idList[-1]})",
#"objectIds": ",".join(map(str, idList)),
"returnIdsOnly": False,
"returnCountOnly": False,
"returnGeometry": True,
**base_kws
})
print(url)
c_gdf += [gpd.read_file(url)]
len(c_gdf)
c_gdf = gpd.GeoDataFrame(pd.concat(c_gdf, axis=0).astype({"Code_18": "int"}), crs= ANALYSIS_CFG.projection.crs)
c_gdf.info()
assert c_gdf.index.size == n_expected, "Number of ids doesn't match!!!"
# Clip to the domain (plus a 4-cell margin) and drop degenerate geometries.
c_gdf["geometry"] = c_gdf.geometry.intersection(domain_cfg["shape"].buffer(4*resolution))
c_gdf = c_gdf[c_gdf.geometry.area > 0]
# Map CLC codes to roughness lengths / labels; unmapped codes become NaN.
c_gdf["RoughnessL"] = c_gdf.Code_18.astype("int").map(CLASS_LUT.get).replace([None], np.nan)
c_gdf["Label"] = c_gdf.Code_18.astype("int").map(label_dict.get).replace([None], np.nan)
print(c_gdf["RoughnessL"].isnull().sum())
# Keep only classes with a valid (non-negative) roughness, i.e. drop NO DATA.
c_gdf = c_gdf[c_gdf.RoughnessL >= 0][["RoughnessL", "geometry", "Code_18", "Label"]]
import requests
# Fetch the official layer legend (per-class label and RGBA symbol color).
legend_json = requests.api.get("https://image.discomap.eea.europa.eu/arcgis/rest/services/Corine/CLC2018_WM/MapServer/0?f=pjson").json()
# {class value -> label}
legend = pd.DataFrame.from_records(legend_json['drawingInfo']['renderer']['uniqueValueInfos'])[["value", "label"]].astype({"value": "int"}).set_index("value")["label"].to_dict()
# {class value -> RGBA tuple scaled to 0..1}
cdict = {int(v["value"]): tuple(np.array(v['symbol']["color"]) / 255.0) for v in legend_json['drawingInfo']['renderer']['uniqueValueInfos']}
legend
def style_function(feature):
    """Leaflet style callback: color each polygon by its CLC ``Code_18`` class.

    Looks the class color up in the module-level ``cdict`` legend (alpha is
    dropped before converting to a hex string).
    """
    import matplotlib as mpl
    hex_color = mpl.colors.rgb2hex(cdict[feature['properties']['Code_18']][:3])
    return {'color': hex_color, 'fillColor': hex_color, 'fillOpacity': 0.4}
# Quick-look plot of the land-cover polygons colored by class.
fig, ax = plt.subplots(1, 1, figsize=(12, 12) )
ax = c_gdf.plot(ax=ax, column="Code_18")
ax.set(xlim=bounds[::2], ylim=bounds[1::2], aspect="equal")
# Burn the polygons onto the DTM grid (same shape/transform as the DTM raster);
# cells not covered by any polygon get fill value 1.
c_raster = rio.features.rasterize(
c_gdf[["geometry", "Code_18"]].apply(tuple, axis=1),
out_shape=raster.data.shape,
transform=raster.transform,
fill=1,
all_touched=True,
dtype=np.int32
)
fig, ax = plt.subplots(1, 1, figsize=(12, 12) )
im = ax.imshow(c_raster, extent=raster.extent)
ax.set(xlim=bounds[::2], ylim=bounds[1::2], aspect="equal")
# Export: shapefile, interactive HTML map, GeoTIFF and ASCII grid.
shp_fname = path.joinpath(f"{domain_name}_landcover_clc2018.shp")
c_gdf.to_file(shp_fname, driver="ESRI Shapefile")
clc_fname = path.joinpath(f"{domain_name}_landcover_clc2018.tif")
grd_fname = path.joinpath(f"{domain_name}_landcover_clc2018.asc.txt")
m = c_gdf.to_leaflet(style_function=style_function)[0]
m.save(str(path.joinpath(f"{domain_name}_landcover_clc2018.html")))
# Temp file + move = quasi-atomic write, same pattern as the DTM export.
with tempfile.NamedTemporaryFile(delete=False) as t_file:
RasterOps.data_to_geotiff(
t_file.name, np.ma.masked_less(c_raster, 0).astype("int32"), raster.transform,
profile={"dtype": "int32", "crs": raster.crs, "nodata":-999}
)
shutil.move(t_file.name, clc_fname)
assert clc_fname.exists()
execute_in_terminal(f"gdal_translate -of AAIGrid '{clc_fname}' '{grd_fname}'", verbose=True)
def get_corine_cmap():
    """Build a matplotlib colormap/norm reproducing the official CLC2018 legend.

    Fetches the EEA ArcGIS layer metadata, converts each class symbol color to
    hex, and returns a ``ListedColormap`` plus a ``BoundaryNorm`` whose
    boundaries are the class values, so rasters of raw class codes render with
    the official CORINE colors.

    Returns
    -------
    tuple(matplotlib.colors.ListedColormap, matplotlib.colors.BoundaryNorm)
    """
    import matplotlib as mpl
    res = list()
    for cfg in requests.get(
        "https://image.discomap.eea.europa.eu/arcgis/rest/services/Corine/CLC2018_WM/MapServer/0?f=pjson"
    ).json()["drawingInfo"]["renderer"]["uniqueValueInfos"]:
        value = cfg["value"]
        label = cfg["label"]
        # Symbol color is RGBA in 0..255; drop the alpha channel.
        color = cfg["symbol"]["color"][:-1]
        res.append((label, value, color))
    res = pd.DataFrame(
        res, columns=["Label", "Code", "RGBColor"]).set_index("Code")
    res["HEXColor"] = res.RGBColor.apply(
        lambda ls: mpl.colors.to_hex(np.array(ls) / 255)).str.upper()
    cmap = mpl.colors.ListedColormap(res["HEXColor"].tolist())
    # pd.Float64Index is deprecated and was removed in pandas 2.0; an explicit
    # float cast of the index values is equivalent and version-proof.
    norm = mpl.colors.BoundaryNorm([float(v) for v in res.index], cmap.N)
    return cmap, norm
corine_cmap, corine_norm = get_corine_cmap()
# Re-open each exported land-cover ASCII grid and display it with the official
# CORINE colors and the mast locations overlaid.
fig, axL = plt.subplots(1, len(ANALYSIS_CFG.roughness.get("items", [])), figsize=(16, 12) )
axL = np.array(axL).flatten()
for i, r_config in enumerate(ANALYSIS_CFG.roughness.get("items", [])):
ax = axL[i]
domain_name = r_config["domain_name"]
path = make_path(ANALYSIS_CFG.roughness.get("path", os.getcwd()))
dtm_fname = path.joinpath(f"{domain_name}_landcover_clc2018.asc.txt")
with rio.open(str(dtm_fname)) as src:
data = src.read(1)
bounds = src.bounds
# (left, bottom, right, top) -> (left, right, bottom, top) for imshow.
extent = src.bounds[::2] + src.bounds[1::2]
im = ax.imshow(data, cmap=corine_cmap, norm=corine_norm, extent=extent)
plt.colorbar(im, ax=ax, orientation="horizontal")
if True:
ax.plot(
[pt.x for pt in gdf.query("Type != 'Domain'").geometry],
[pt.y for pt in gdf.query("Type != 'Domain'").geometry],
mfc="w", color="k", marker="o", ls="none"
)
ax.set(xlim=extent[:2], ylim=extent[2:], aspect="equal", title=domain_name)
fig.tight_layout();
import core.gis.raster.ops as RasterOps
# Interpolate the high-resolution DTM at each mast location and store the
# terrain elevation in the "TerrainHeight" column (NaN outside the raster).
idx = gdf.query("Type != 'Domain'").index
gdf.loc[idx, "TerrainHeight"] = RasterOps.interpolate_on_data(
gdf.loc[idx].geometry.apply(lambda pt: pd.Series([pt.x, pt.y])),
dtm_dict["small_domain"].data,
dtm_dict["small_domain"].transform,
method="linear",
bounds_error=False,
fill_value=np.nan
).round(1)
gdf.TerrainHeight
from core.wmts import WMTSDownloader
print(WMTSDownloader.list_available_servers())
from core.wmts.amap_downloader import AMAPDownloader
from core.wmts import WMTSDownloader
from core.docker import from_container_path_to_win
# Download one background map (satellite / shaded relief) per configured map
# item, reprojected to the analysis CRS and clipped to the domain rectangle.
for item_cfg in ANALYSIS_CFG.maps.get("items", []):
path = make_path(ANALYSIS_CFG.maps.get("path", os.getcwd()))
pprint(item_cfg)
domain_name = item_cfg["domain_name"]
domain_shape = domains_dict[domain_name]["shape"]
# mpp = target meters-per-pixel resolution of the map.
mpp = item_cfg["mpp"]
server_name = item_cfg["server_name"]
dl = None
if server_name == "AMAP":
dl = AMAPDownloader(
resolution=mpp,
output_path=str(path),
jpeg_quality=80,
n_threads=32,
optimize_download=False,
out_epsg=ANALYSIS_CFG.projection.epsg,
download_shape=domain_shape,
download_shape_epsg_code=ANALYSIS_CFG.projection.epsg,
big_tif=True,
parallel=True,
prefix=None,
tag=item_cfg["filename"],
jpeg_extension="jpeg"
)
else:
print(mpp)
dl = WMTSDownloader(
server_name=server_name,
resolution=mpp,
output_path=str(path),
force_resolution=False,
jpeg_quality=80,
n_threads=32,
optimize_download=False,
max_attempts=10,
out_epsg=ANALYSIS_CFG.projection.epsg,
download_shape=domain_shape,
download_shape_epsg_code=ANALYSIS_CFG.projection.epsg,
big_tif=True,
parallel=True,
jpeg_extension="jpeg"
)
# Output basename comes from the config (e.g. "google_satellite_large").
dl.tag_name = item_cfg["filename"]
#if os.path.isfile(dl.jpeg_fname):
# continue
# NOTE(review): `except: raise` re-raises immediately, so the `continue`
# below it is unreachable; the try/except is effectively a no-op left over
# from a skip-on-error variant.
try:
print(dl.download_df.index.size, getattr(dl, "selected_zoom", None), getattr(dl, "selected_resolution", None))
dl.run_all(create_kmz=False, create_tif=False, create_jpeg=True, create_png=False, clean_files=True, create_pyramids=False, verbose=False)
except:
raise
continue
# Preview: stack the three JPEG bands into an RGB image.
fig, ax = plt.subplots(1, 1, figsize=(20, 20))
nrg = np.dstack((rio.open(dl.jpeg_fname).read(1), rio.open(dl.jpeg_fname).read(2), rio.open(dl.jpeg_fname).read(3)))
ax.imshow(nrg)
ax.set_aspect("equal")
IPython.display.display(fig)
plt.close(fig)
# NOTE(review): this import is shadowed by a local redefinition of
# change_world_files further down in this file -- presumably a notebook-side
# override of the library version; confirm which one is intended.
from wind.gramm_gral.utils import change_world_files
path = make_path(ANALYSIS_CFG.maps.get("path", os.getcwd()))
# Show the world files produced by the map downloads above.
list(path.glob("*.jpegw"))
def change_world_files(maps_path):
    """Create WindPro-style ``.jpgw`` world files next to existing map images.

    For every world file (``.tfw`` / ``.jpegw`` / ``.pngw``) in *maps_path*
    whose matching image (``.tif`` / ``.jpeg`` / ``.png``) exists, writes a
    ``<stem>.jpgw`` containing the first six lines of the world file plus the
    Windows-translated image path as an extra trailing line.

    NOTE(review): this local definition shadows the
    ``wind.gramm_gral.utils.change_world_files`` imported above -- presumably a
    deliberate notebook-side override; confirm.

    Parameters
    ----------
    maps_path : path-like
        Directory holding the downloaded maps and their world files.

    Returns
    -------
    int
        Number of world files converted.
    """
    maps_path = make_path(maps_path)
    assert maps_path.exists()
    n = 0
    for (ext, wld) in [("tif", "tfw"), ("jpeg", "jpegw"), ("png", "pngw")]:
        for fname in maps_path.glob("*." + wld):
            picture_file = fname.parent.joinpath(fname.stem + "." + ext)
            if not picture_file.exists():
                # World file without its image: nothing to reference.
                continue
            fname_new = fname.parent.joinpath(fname.stem + "." + "jpgw")
            n += 1
            # WindPro runs on the Windows host, so embed the translated path.
            picture_file = from_container_path_to_win(str(picture_file))
            # Read via a context manager -- the original leaked the handle
            # (fname.open("r").readlines() was never closed).
            with fname.open("r") as fin:
                lines = fin.readlines()[:6]
            lines.append(picture_file)
            with fname_new.open("w") as fout:
                fout.write("".join(lines))
    return n
change_world_files(path)
# --- GRAMM simulation project --------------------------------------------
SIMULATION_PATH = pathlib.Path(norm_pathname(r"T:\EWS_Projects\Gral_Gramm\askervein\gramm"))
assert SIMULATION_PATH.exists()
pprint(list(SIMULATION_PATH.glob("*")))
from wind.gramm_gral.gramm.simulation import GrammSimulation
simulation = GrammSimulation(SIMULATION_PATH, epsg_code=ANALYSIS_CFG.projection.epsg)
print(simulation.projection_name)
simulation
gdf.query("Type != 'Domain'")
# Register the masts as receptors (fixed 10 m height/diameter for all).
print(simulation.write_receptors(gdf.query("Type != 'Domain'").assign(Height=10, Diameter=10), height_name="Height", diameter_name="Diameter"))
# Read the mesh and export it for inspection (VTK image/structured grids).
simulation.mesh.read_mesh()
simulation.mesh.write_terrain_data(simulation.project_path.joinpath("terrain_data.vti"))
simulation.mesh.write_vtk_grid(simulation.project_path.joinpath("mesh.vts"))
print(simulation.number_of_cells, simulation.domain_width, simulation.domain_height)
# Closest receptor-to-boundary distance: receptors should be well inside.
print(simulation.receptors_gdf.distance(simulation.domain.values[0].exterior).min())
print(simulation.latitude)
print(simulation.update_config_files_with_latitude())
# Echo the main computation settings for the record.
simulation.read_computation_info()
print(simulation.uses_meteopgt)
print(simulation.z0)
print(simulation.n_smoothed_cells)
print(simulation.write_steady_state_file)
simulation.n_smoothed_cells
# Overview plots of the simulation input rasters (topography, land use, ...),
# with the receptor positions and the smoothing/margin bands overlaid.
rasters = simulation.topography_raster, simulation.land_use_raster, simulation.tpi_slope_min_raster, simulation.landform_class_raster
# Two plots per row; assumes an even number of non-None rasters.
n = int(len([_ for _ in rasters if _ is not None]) / 2)
fig, axL = plt.subplots(n, 2, figsize=(8 * 2, n * (8*simulation.aspect_ratio + 0.5) ))
axL = np.array(axL).flatten()
simulation._rasters_dict = dict()
bounds = simulation.bounds
extent = simulation.extent
for ax, raster in zip(axL, rasters):
cmap = "gist_earth" if raster.name == "topography" else "viridis"
norm = None
if raster.name == "landform_class":
# Discrete classes 1..10: categorical palette + integer boundaries.
import matplotlib as mpl
from core.eplot.pandas_integration import EchartsAxis
cmap = mpl.colors.ListedColormap(EchartsAxis.create_palette("tab10_10"))
norm = mpl.colors.BoundaryNorm(np.arange(1, 11, 1), cmap.N)
im = ax.imshow(raster.data, cmap=cmap, norm=norm, extent=raster.extent)
plt.colorbar(im, ax=ax, orientation="horizontal").set_label(raster.name)
if True:
# NOTE(review): this MUTATES the simulation inside a plotting loop
# (sets n_smoothed_cells = 20 on every iteration) -- confirm this is
# intended configuration rather than leftover experimentation.
simulation.n_smoothed_cells = 20
ax.plot(
simulation.receptors_gdf.X,
simulation.receptors_gdf.Y,
mfc="w", color="k", marker="o", ls="none"
)
# White dashed lines: extent of the boundary smoothing band.
smoothing_distance = simulation.dx * simulation.n_smoothed_cells
style = dict(color="white", ls="--")
ax.axvline(bounds[0] + smoothing_distance, **style)
ax.axvline(bounds[2] - smoothing_distance, **style)
ax.axhline(bounds[1] + smoothing_distance, **style)
ax.axhline(bounds[3] - smoothing_distance, **style)
if True:
# Red dashed lines: fixed 10 km reference margin.
distance = 10000
style = dict(color="red", ls="--")
ax.axvline(bounds[0] + distance, **style)
ax.axvline(bounds[2] - distance, **style)
ax.axhline(bounds[1] + distance, **style)
ax.axhline(bounds[3] - distance, **style)
ax.set(xlim=extent[:2], ylim=extent[2:], aspect="equal", title=raster.name)
fig.tight_layout();
# Build the weather-case table: 36 wind sectors, one speed (5 m/s), one
# stability class (4), reference height 10 m.
simulation.write_meteopgt(n_sectors=36, wind_speeds=[5], stability_classes=[4], height=10)
simulation.test_cases_df
simulation.number_of_cells
# Reset any previous run artifacts before writing new job files.
simulation.clean_computation_files()
simulation.mesh.read_mesh()
simulation.clean_computation_files()
simulation.clean_jobs_files()
simulation.update_config_files_with_latitude()
# Use the real terrain (not the flat-terrain test mode).
simulation.set_use_flat(False)
# Echo the GRAMM IIN.dat control file with line numbers for inspection.
for i, l in enumerate(simulation.computation_path.joinpath("IIN.dat").open("r").readlines()):
print(f"{i}:{l.strip()}")
test_cases_df = simulation.test_cases_df
print(test_cases_df.index.size)
# One job file per batch: 12 single-threaded processes.
jobs_df = simulation.write_job_files(n_procs=12, n_threads=1)
#jobs_df = simulation.write_job_files(n_procs=5, n_threads=5, query=None)
print(test_cases_df.index.size, jobs_df.index.size, jobs_df.index[0])
jobs_df
# Footer: stamp the end-of-run time, add a fixed-position code-toggle button,
# and export the executed notebook to HTML (twice: project helper + nbconvert
# CLI with table of contents).
display_end_of_script(raise_error=False, tz='Europe/Vienna')
IPython.display.HTML('''<script>
code_show = true;
function code_toggle() {
if (code_show){
$('div.input').hide();
} else {
$('div.input').show();
}
code_show = !code_show
}
</script>
<button id="__toggler__" style="position: fixed;right: 5px;top: 5px;z-index: 1000;" type="button" onclick="code_toggle()"><b>TOGGLE CODE CELLS</b></button>''')
nbconvert_html('00_setup_windpro_project.ipynb', toc=True)
!jupyter nbconvert --to=html_toc --ExtractOutputPreprocessor.extract_output_types=[] --ExecutePreprocessor.store_widget_state=True '00_setup_windpro_project.ipynb'